In [3]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
import warnings
warnings.filterwarnings('ignore')
In [4]:
print (os.getcwd())
C:\Users\DELL
In [5]:
os.chdir ('C:\\project\\Projects\\PRJ Car Price Prediction')
print (os.getcwd())
C:\project\Projects\PRJ Car Price Prediction
In [6]:
df=pd.read_csv("audi.csv")
display(df)
model year price transmission mileage fuelType tax mpg engineSize
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0
... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4

10668 rows × 9 columns

In [7]:
import pandas_profiling as pf
display(pf.ProfileReport(df))
Summarize dataset:   0%|          | 0/5 [00:00<?, ?it/s]
Generate report structure:   0%|          | 0/1 [00:00<?, ?it/s]
Render HTML:   0%|          | 0/1 [00:00<?, ?it/s]

In [8]:
import pandas_profiling as pf
display(pf.ProfileReport(df))
Summarize dataset:   0%|          | 0/5 [00:00<?, ?it/s]
Generate report structure:   0%|          | 0/1 [00:00<?, ?it/s]
Render HTML:   0%|          | 0/1 [00:00<?, ?it/s]

In [9]:
print (len(df))
10668
In [10]:
display (df.shape)
(10668, 9)
In [11]:
display (df.dtypes )
model            object
year              int64
price             int64
transmission     object
mileage           int64
fuelType         object
tax               int64
mpg             float64
engineSize      float64
dtype: object
In [12]:
display (df.isna().sum() )
model           0
year            0
price           0
transmission    0
mileage         0
fuelType        0
tax             0
mpg             0
engineSize      0
dtype: int64
In [13]:
print (df.info())
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10668 entries, 0 to 10667
Data columns (total 9 columns):
 #   Column        Non-Null Count  Dtype  
---  ------        --------------  -----  
 0   model         10668 non-null  object 
 1   year          10668 non-null  int64  
 2   price         10668 non-null  int64  
 3   transmission  10668 non-null  object 
 4   mileage       10668 non-null  int64  
 5   fuelType      10668 non-null  object 
 6   tax           10668 non-null  int64  
 7   mpg           10668 non-null  float64
 8   engineSize    10668 non-null  float64
dtypes: float64(2), int64(4), object(3)
memory usage: 750.2+ KB
None
In [14]:
display (df.describe ())
year price mileage tax mpg engineSize
count 10668.000000 10668.000000 10668.000000 10668.000000 10668.000000 10668.000000
mean 2017.100675 22896.685039 24827.244001 126.011436 50.770022 1.930709
std 2.167494 11714.841888 23505.257205 67.170294 12.949782 0.602957
min 1997.000000 1490.000000 1.000000 0.000000 18.900000 0.000000
25% 2016.000000 15130.750000 5968.750000 125.000000 40.900000 1.500000
50% 2017.000000 20200.000000 19000.000000 145.000000 49.600000 2.000000
75% 2019.000000 27990.000000 36464.500000 145.000000 58.900000 2.000000
max 2020.000000 145000.000000 323000.000000 580.000000 188.300000 6.300000
In [15]:
X = df.iloc[:,[0,1,3,4,5,6,7,8]].values
display (X.shape)
display (X)
(10668, 8)
array([[' A1', 2017, 'Manual', ..., 150, 55.4, 1.4],
       [' A6', 2016, 'Automatic', ..., 20, 64.2, 2.0],
       [' A1', 2016, 'Manual', ..., 30, 55.4, 1.4],
       ...,
       [' A3', 2020, 'Manual', ..., 150, 49.6, 1.0],
       [' Q3', 2017, 'Automatic', ..., 150, 47.9, 1.4],
       [' Q3', 2016, 'Manual', ..., 150, 47.9, 1.4]], dtype=object)
In [16]:
Y = df.iloc[:,[2]].values
display (Y.shape)
display (Y)
(10668, 1)
array([[12500],
       [16500],
       [11000],
       ...,
       [17199],
       [19499],
       [15999]], dtype=int64)
In [17]:
display(pd.DataFrame(X).head(5))
0 1 2 3 4 5 6 7
0 A1 2017 Manual 15735 Petrol 150 55.4 1.4
1 A6 2016 Automatic 36203 Diesel 20 64.2 2.0
2 A1 2016 Manual 29946 Petrol 30 55.4 1.4
3 A4 2017 Automatic 25952 Diesel 145 67.3 2.0
4 A3 2019 Manual 1998 Petrol 145 49.6 1.0
In [18]:
from sklearn.preprocessing import LabelEncoder
le1 = LabelEncoder()
X[:,0] = le1.fit_transform(X[:,0])
le2 = LabelEncoder()
X[:,-4] = le2.fit_transform(X[:,-4])
display (X)
array([[0, 2017, 'Manual', ..., 150, 55.4, 1.4],
       [5, 2016, 'Automatic', ..., 20, 64.2, 2.0],
       [0, 2016, 'Manual', ..., 30, 55.4, 1.4],
       ...,
       [2, 2020, 'Manual', ..., 150, 49.6, 1.0],
       [9, 2017, 'Automatic', ..., 150, 47.9, 1.4],
       [9, 2016, 'Manual', ..., 150, 47.9, 1.4]], dtype=object)
In [19]:
from sklearn.preprocessing import OneHotEncoder
from sklearn.compose import ColumnTransformer
ct = ColumnTransformer(transformers = [('encoder',OneHotEncoder(),[2])],remainder='passthrough')
X = ct.fit_transform(X)
display (X.shape)
display (pd.DataFrame(X))
(10668, 10)
0 1 2 3 4 5 6 7 8 9
0 0.0 1.0 0.0 0 2017 15735 2 150 55.4 1.4
1 1.0 0.0 0.0 5 2016 36203 0 20 64.2 2.0
2 0.0 1.0 0.0 0 2016 29946 2 30 55.4 1.4
3 1.0 0.0 0.0 3 2017 25952 0 145 67.3 2.0
4 0.0 1.0 0.0 2 2019 1998 2 145 49.6 1.0
... ... ... ... ... ... ... ... ... ... ...
10663 0.0 1.0 0.0 2 2020 4018 2 145 49.6 1.0
10664 0.0 1.0 0.0 2 2020 1978 2 150 49.6 1.0
10665 0.0 1.0 0.0 2 2020 609 2 150 49.6 1.0
10666 1.0 0.0 0.0 9 2017 8646 2 150 47.9 1.4
10667 0.0 1.0 0.0 9 2016 11855 2 150 47.9 1.4

10668 rows × 10 columns

In [20]:
display (pd.DataFrame(X))
0 1 2 3 4 5 6 7 8 9
0 0.0 1.0 0.0 0 2017 15735 2 150 55.4 1.4
1 1.0 0.0 0.0 5 2016 36203 0 20 64.2 2.0
2 0.0 1.0 0.0 0 2016 29946 2 30 55.4 1.4
3 1.0 0.0 0.0 3 2017 25952 0 145 67.3 2.0
4 0.0 1.0 0.0 2 2019 1998 2 145 49.6 1.0
... ... ... ... ... ... ... ... ... ... ...
10663 0.0 1.0 0.0 2 2020 4018 2 145 49.6 1.0
10664 0.0 1.0 0.0 2 2020 1978 2 150 49.6 1.0
10665 0.0 1.0 0.0 2 2020 609 2 150 49.6 1.0
10666 1.0 0.0 0.0 9 2017 8646 2 150 47.9 1.4
10667 0.0 1.0 0.0 9 2016 11855 2 150 47.9 1.4

10668 rows × 10 columns

In [21]:
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X = sc.fit_transform(X)
display (pd.DataFrame(X))
0 1 2 3 4 5 6 7 8 9
0 -0.583268 1.200728 -0.712333 -1.123544 -0.046450 -0.386836 1.050783 0.357147 0.357550 -0.880218
1 1.714479 -0.832828 -0.712333 -0.160831 -0.507834 0.483989 -0.954181 -1.578323 1.037130 0.114925
2 -0.583268 1.200728 -0.712333 -1.123544 -0.507834 0.217781 1.050783 -1.429440 0.357550 -0.880218
3 1.714479 -0.832828 -0.712333 -0.545916 -0.046450 0.047853 -0.954181 0.282706 1.276528 0.114925
4 -0.583268 1.200728 -0.712333 -0.738459 0.876318 -0.971285 1.050783 0.282706 -0.090355 -1.543647
... ... ... ... ... ... ... ... ... ... ...
10663 -0.583268 1.200728 -0.712333 -0.738459 1.337702 -0.885343 1.050783 0.282706 -0.090355 -1.543647
10664 -0.583268 1.200728 -0.712333 -0.738459 1.337702 -0.972136 1.050783 0.357147 -0.090355 -1.543647
10665 -0.583268 1.200728 -0.712333 -0.738459 1.337702 -1.030381 1.050783 0.357147 -0.090355 -1.543647
10666 1.714479 -0.832828 -0.712333 0.609339 -0.046450 -0.688442 1.050783 0.357147 -0.221637 -0.880218
10667 -0.583268 1.200728 -0.712333 0.609339 -0.507834 -0.551913 1.050783 0.357147 -0.221637 -0.880218

10668 rows × 10 columns

In [22]:
from sklearn.model_selection import train_test_split
(X_train,X_test,Y_train,Y_test) = train_test_split(X,Y,test_size=0.2,random_state=0)
print (X.shape, Y.shape)
print (X_train.shape, Y_train.shape)
print (X_test.shape, Y_test.shape)
(10668, 10) (10668, 1)
(8534, 10) (8534, 1)
(2134, 10) (2134, 1)
In [23]:
from sklearn.ensemble import RandomForestRegressor
regression = RandomForestRegressor(random_state=0)
regression.fit(X_train,Y_train)
display (regression)
RandomForestRegressor(random_state=0)
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
RandomForestRegressor(random_state=0)
In [24]:
y_pred = regression.predict(X_test)
display (y_pred)
array([14337.15, 23450.35, 27330.07, ..., 46275.18, 31359.  ,  9929.62])
In [25]:
print(np.concatenate((y_pred.reshape(len(y_pred),1),Y_test.reshape(len(Y_test),1)),1))
[[14337.15 14998.  ]
 [23450.35 21950.  ]
 [27330.07 28990.  ]
 ...
 [46275.18 45995.  ]
 [31359.   30500.  ]
 [ 9929.62  8400.  ]]
In [26]:
from sklearn.metrics import r2_score,mean_absolute_error
print  ('R2 Score ', r2_score(Y_test, y_pred))
print  ('Mean Absolute Error', mean_absolute_error(Y_test,y_pred))
R2 Score  0.9536134841307546
Mean Absolute Error 1538.730980670462
In [27]:
from sklearn.linear_model import LinearRegression
reg = LinearRegression()
reg.fit(X_train,Y_train)
print(reg)
LinearRegression()
In [28]:
y_pred = reg.predict(X_test)
display (y_pred)
array([[13121.75937793],
       [29393.75937793],
       [31873.75937793],
       ...,
       [42565.75937793],
       [31485.75937793],
       [ 7377.75937793]])
In [29]:
print(np.concatenate((y_pred.reshape(len(y_pred),1),Y_test.reshape(len(Y_test),1)),1))
[[13121.75937793 14998.        ]
 [29393.75937793 21950.        ]
 [31873.75937793 28990.        ]
 ...
 [42565.75937793 45995.        ]
 [31485.75937793 30500.        ]
 [ 7377.75937793  8400.        ]]
In [30]:
from sklearn.metrics import r2_score,mean_absolute_error
print  ('R2 Score ', r2_score(Y_test, y_pred))
print  ('Mean Absolute Error', mean_absolute_error(Y_test,y_pred))
R2 Score  0.7915565638111468
Mean Absolute Error 3379.9270826343727
In [31]:
y_pred = reg.predict(X)
display (y_pred)
array([[14681.75937793],
       [20633.75937793],
       [13881.75937793],
       ...,
       [19449.75937793],
       [20965.75937793],
       [16737.75937793]])
In [32]:
result = pd.concat([df,pd.DataFrame(y_pred)],axis=1)
display( result)
model year price transmission mileage fuelType tax mpg engineSize 0
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4 14681.759378
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0 20633.759378
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4 13881.759378
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0 19885.759378
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0 17397.759378
... ... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0 19237.759378
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0 19321.759378
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0 19449.759378
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4 20965.759378
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4 16737.759378

10668 rows × 10 columns

In [33]:
from sklearn.ensemble import  ExtraTreesRegressor
ET_Model=ExtraTreesRegressor(n_estimators = 120)
ET_Model.fit(X_train,Y_train)
y_predict=ET_Model.predict(X_test)
from sklearn.metrics import r2_score,mean_absolute_error
print  ('R2 Score ', r2_score(Y_test, y_predict))
print  ('Mean Absolute Error', mean_absolute_error(Y_test,y_predict))
R2 Score  0.95530840142209
Mean Absolute Error 1544.252601530772
In [39]:
y_pred = reg.predict(X)
display (y_pred)
result = pd.concat([df,pd.DataFrame(y_pred)],axis=1)
display( result)
array([[14681.75937793],
       [20633.75937793],
       [13881.75937793],
       ...,
       [19449.75937793],
       [20965.75937793],
       [16737.75937793]])
model year price transmission mileage fuelType tax mpg engineSize 0
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4 14681.759378
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0 20633.759378
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4 13881.759378
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0 19885.759378
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0 17397.759378
... ... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0 19237.759378
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0 19321.759378
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0 19449.759378
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4 20965.759378
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4 16737.759378

10668 rows × 10 columns

In [35]:
from sklearn.model_selection import RandomizedSearchCV
n_estimators = [int(x) for x in np.linspace(start = 80, stop = 1500, num = 10)]
max_features = ['auto', 'sqrt']
max_depth = [int(x) for x in np.linspace(6, 45, num = 5)]
min_samples_split = [2, 5, 10, 15, 100]
min_samples_leaf = [1, 2, 5, 10]

# create random grid

rand_grid={'n_estimators': n_estimators,
               'max_features': max_features,
               'max_depth': max_depth,
               'min_samples_split': min_samples_split,
               'min_samples_leaf': min_samples_leaf}

rf=RandomForestRegressor()

rCV=RandomizedSearchCV(estimator=rf,param_distributions=rand_grid,scoring='neg_mean_squared_error',n_iter=3,cv=3,random_state=42, n_jobs = 1)
In [42]:
rf_pred=rCV.predict(X_test)
display (rf_pred)
array([14058.87990831, 23737.34462538, 28473.71003631, ...,
       48104.76434693, 31300.69766831, 10037.72500503])
In [43]:
from sklearn.metrics import mean_absolute_error,mean_squared_error
print('MAE',mean_absolute_error(Y_test,rf_pred))
print('MSE',mean_squared_error(Y_test,rf_pred))
MAE 1502.6607191516405
MSE 5695470.401678256
In [44]:
display (r2_score(Y_test,rf_pred))
0.9585530804600272
In [45]:
from catboost import CatBoostRegressor
cat=CatBoostRegressor()
print (cat.fit(X_train,Y_train))
Learning rate set to 0.057452
0:	learn: 11184.0694779	total: 161ms	remaining: 2m 40s
1:	learn: 10690.2138498	total: 168ms	remaining: 1m 23s
2:	learn: 10221.1011877	total: 173ms	remaining: 57.7s
3:	learn: 9778.8067263	total: 179ms	remaining: 44.5s
4:	learn: 9382.6454054	total: 183ms	remaining: 36.5s
5:	learn: 8989.9290587	total: 191ms	remaining: 31.6s
6:	learn: 8628.4892653	total: 199ms	remaining: 28.2s
7:	learn: 8280.9179062	total: 206ms	remaining: 25.5s
8:	learn: 7950.5476053	total: 213ms	remaining: 23.5s
9:	learn: 7636.7735365	total: 223ms	remaining: 22.1s
10:	learn: 7349.6084699	total: 232ms	remaining: 20.8s
11:	learn: 7084.0457263	total: 238ms	remaining: 19.6s
12:	learn: 6842.7060927	total: 245ms	remaining: 18.6s
13:	learn: 6608.0244743	total: 251ms	remaining: 17.7s
14:	learn: 6387.7285726	total: 258ms	remaining: 16.9s
15:	learn: 6170.1354894	total: 264ms	remaining: 16.2s
16:	learn: 5973.9133066	total: 270ms	remaining: 15.6s
17:	learn: 5780.4713044	total: 277ms	remaining: 15.1s
18:	learn: 5611.7289090	total: 283ms	remaining: 14.6s
19:	learn: 5442.3513779	total: 289ms	remaining: 14.2s
20:	learn: 5286.7176367	total: 296ms	remaining: 13.8s
21:	learn: 5137.6512896	total: 302ms	remaining: 13.4s
22:	learn: 4990.7923670	total: 308ms	remaining: 13.1s
23:	learn: 4861.3009833	total: 313ms	remaining: 12.7s
24:	learn: 4739.6194386	total: 319ms	remaining: 12.4s
25:	learn: 4618.7487999	total: 325ms	remaining: 12.2s
26:	learn: 4506.4548775	total: 332ms	remaining: 12s
27:	learn: 4401.2065904	total: 338ms	remaining: 11.7s
28:	learn: 4305.6257343	total: 344ms	remaining: 11.5s
29:	learn: 4207.0507760	total: 350ms	remaining: 11.3s
30:	learn: 4124.0046256	total: 356ms	remaining: 11.1s
31:	learn: 4040.1936249	total: 362ms	remaining: 10.9s
32:	learn: 3968.0976460	total: 368ms	remaining: 10.8s
33:	learn: 3896.9106971	total: 383ms	remaining: 10.9s
34:	learn: 3827.6695910	total: 396ms	remaining: 10.9s
35:	learn: 3764.4378792	total: 404ms	remaining: 10.8s
36:	learn: 3703.8876617	total: 410ms	remaining: 10.7s
37:	learn: 3642.1855579	total: 418ms	remaining: 10.6s
38:	learn: 3593.4131916	total: 426ms	remaining: 10.5s
39:	learn: 3541.6799962	total: 432ms	remaining: 10.4s
40:	learn: 3493.4081900	total: 439ms	remaining: 10.3s
41:	learn: 3453.4874803	total: 446ms	remaining: 10.2s
42:	learn: 3410.5308321	total: 452ms	remaining: 10.1s
43:	learn: 3370.6809022	total: 458ms	remaining: 9.95s
44:	learn: 3333.4757971	total: 465ms	remaining: 9.86s
45:	learn: 3296.7604576	total: 471ms	remaining: 9.77s
46:	learn: 3264.0377013	total: 478ms	remaining: 9.7s
47:	learn: 3232.4741430	total: 485ms	remaining: 9.61s
48:	learn: 3196.7546158	total: 491ms	remaining: 9.53s
49:	learn: 3169.5785953	total: 497ms	remaining: 9.45s
50:	learn: 3145.2595753	total: 503ms	remaining: 9.37s
51:	learn: 3121.9559750	total: 510ms	remaining: 9.29s
52:	learn: 3103.9335304	total: 516ms	remaining: 9.22s
53:	learn: 3081.5474184	total: 523ms	remaining: 9.16s
54:	learn: 3058.2078780	total: 529ms	remaining: 9.1s
55:	learn: 3040.6918309	total: 534ms	remaining: 8.99s
56:	learn: 3023.6582508	total: 539ms	remaining: 8.91s
57:	learn: 3007.3255135	total: 543ms	remaining: 8.82s
58:	learn: 2990.4811558	total: 547ms	remaining: 8.73s
59:	learn: 2972.3283312	total: 552ms	remaining: 8.65s
60:	learn: 2959.7431445	total: 557ms	remaining: 8.57s
61:	learn: 2946.5923636	total: 561ms	remaining: 8.48s
62:	learn: 2934.9869357	total: 564ms	remaining: 8.39s
63:	learn: 2920.6625964	total: 568ms	remaining: 8.31s
64:	learn: 2905.9913604	total: 574ms	remaining: 8.26s
65:	learn: 2890.8150557	total: 582ms	remaining: 8.24s
66:	learn: 2881.8105155	total: 587ms	remaining: 8.18s
67:	learn: 2869.9838674	total: 592ms	remaining: 8.11s
68:	learn: 2856.9685778	total: 598ms	remaining: 8.06s
69:	learn: 2847.6677815	total: 611ms	remaining: 8.11s
70:	learn: 2838.3074544	total: 616ms	remaining: 8.06s
71:	learn: 2831.5916477	total: 623ms	remaining: 8.03s
72:	learn: 2823.7402368	total: 627ms	remaining: 7.96s
73:	learn: 2817.6964617	total: 631ms	remaining: 7.9s
74:	learn: 2806.9046704	total: 637ms	remaining: 7.85s
75:	learn: 2799.2999793	total: 641ms	remaining: 7.79s
76:	learn: 2792.2531112	total: 645ms	remaining: 7.74s
77:	learn: 2786.0539054	total: 650ms	remaining: 7.68s
78:	learn: 2778.3667928	total: 657ms	remaining: 7.65s
79:	learn: 2770.9670854	total: 663ms	remaining: 7.62s
80:	learn: 2765.4085686	total: 669ms	remaining: 7.58s
81:	learn: 2758.2673921	total: 672ms	remaining: 7.53s
82:	learn: 2752.2341491	total: 676ms	remaining: 7.47s
83:	learn: 2746.5786616	total: 681ms	remaining: 7.42s
84:	learn: 2734.6379923	total: 685ms	remaining: 7.38s
85:	learn: 2730.6071445	total: 690ms	remaining: 7.33s
86:	learn: 2724.7246149	total: 694ms	remaining: 7.28s
87:	learn: 2721.0052457	total: 698ms	remaining: 7.24s
88:	learn: 2712.3204256	total: 703ms	remaining: 7.19s
89:	learn: 2706.8884195	total: 710ms	remaining: 7.17s
90:	learn: 2700.6465881	total: 715ms	remaining: 7.15s
91:	learn: 2695.4837286	total: 721ms	remaining: 7.11s
92:	learn: 2684.0451492	total: 726ms	remaining: 7.08s
93:	learn: 2679.3709509	total: 730ms	remaining: 7.04s
94:	learn: 2671.3324958	total: 736ms	remaining: 7.01s
95:	learn: 2668.2768042	total: 742ms	remaining: 6.98s
96:	learn: 2664.6017210	total: 747ms	remaining: 6.95s
97:	learn: 2661.1200451	total: 751ms	remaining: 6.91s
98:	learn: 2658.3753198	total: 755ms	remaining: 6.87s
99:	learn: 2650.2996235	total: 759ms	remaining: 6.83s
100:	learn: 2648.1326775	total: 765ms	remaining: 6.8s
101:	learn: 2645.6487331	total: 770ms	remaining: 6.78s
102:	learn: 2642.4443354	total: 776ms	remaining: 6.76s
103:	learn: 2640.1216511	total: 781ms	remaining: 6.73s
104:	learn: 2636.1391523	total: 787ms	remaining: 6.71s
105:	learn: 2632.4110278	total: 791ms	remaining: 6.67s
106:	learn: 2624.0779999	total: 795ms	remaining: 6.63s
107:	learn: 2618.9404235	total: 798ms	remaining: 6.59s
108:	learn: 2617.2460690	total: 802ms	remaining: 6.55s
109:	learn: 2612.7806768	total: 807ms	remaining: 6.53s
110:	learn: 2607.2897650	total: 811ms	remaining: 6.49s
111:	learn: 2601.5187968	total: 815ms	remaining: 6.46s
112:	learn: 2599.0174087	total: 821ms	remaining: 6.45s
113:	learn: 2595.7902363	total: 826ms	remaining: 6.42s
114:	learn: 2589.7462674	total: 831ms	remaining: 6.4s
115:	learn: 2584.1747358	total: 837ms	remaining: 6.38s
116:	learn: 2581.5784516	total: 855ms	remaining: 6.46s
117:	learn: 2573.1943706	total: 861ms	remaining: 6.44s
118:	learn: 2571.2467133	total: 867ms	remaining: 6.42s
119:	learn: 2567.9967348	total: 873ms	remaining: 6.4s
120:	learn: 2561.8072039	total: 879ms	remaining: 6.39s
121:	learn: 2557.7552506	total: 884ms	remaining: 6.36s
122:	learn: 2554.4333020	total: 888ms	remaining: 6.33s
123:	learn: 2552.8292311	total: 892ms	remaining: 6.3s
124:	learn: 2548.1769382	total: 897ms	remaining: 6.28s
125:	learn: 2547.0079747	total: 900ms	remaining: 6.24s
126:	learn: 2545.3790353	total: 904ms	remaining: 6.21s
127:	learn: 2539.6944398	total: 909ms	remaining: 6.19s
128:	learn: 2534.5374763	total: 913ms	remaining: 6.16s
129:	learn: 2529.7280897	total: 917ms	remaining: 6.13s
130:	learn: 2527.4098548	total: 920ms	remaining: 6.11s
131:	learn: 2525.7165491	total: 924ms	remaining: 6.07s
132:	learn: 2523.7838455	total: 929ms	remaining: 6.06s
133:	learn: 2520.3784007	total: 935ms	remaining: 6.04s
134:	learn: 2517.5840823	total: 941ms	remaining: 6.03s
135:	learn: 2513.1034937	total: 947ms	remaining: 6.01s
136:	learn: 2509.4022766	total: 953ms	remaining: 6s
137:	learn: 2505.3916480	total: 958ms	remaining: 5.98s
138:	learn: 2503.7477850	total: 964ms	remaining: 5.97s
139:	learn: 2502.2292847	total: 970ms	remaining: 5.96s
140:	learn: 2496.4265667	total: 978ms	remaining: 5.96s
141:	learn: 2493.7826580	total: 985ms	remaining: 5.95s
142:	learn: 2491.3225450	total: 992ms	remaining: 5.94s
143:	learn: 2487.8639855	total: 999ms	remaining: 5.93s
144:	learn: 2486.1786714	total: 1s	remaining: 5.92s
145:	learn: 2484.6798215	total: 1.01s	remaining: 5.91s
146:	learn: 2481.9012350	total: 1.01s	remaining: 5.89s
147:	learn: 2478.9312260	total: 1.02s	remaining: 5.87s
148:	learn: 2476.5372412	total: 1.02s	remaining: 5.85s
149:	learn: 2473.2546223	total: 1.03s	remaining: 5.83s
150:	learn: 2471.4471464	total: 1.03s	remaining: 5.81s
151:	learn: 2469.6409715	total: 1.04s	remaining: 5.8s
152:	learn: 2466.9193203	total: 1.04s	remaining: 5.79s
153:	learn: 2465.1844902	total: 1.05s	remaining: 5.77s
154:	learn: 2463.0499117	total: 1.05s	remaining: 5.75s
155:	learn: 2461.7827140	total: 1.06s	remaining: 5.73s
156:	learn: 2458.9977863	total: 1.06s	remaining: 5.71s
157:	learn: 2454.9587035	total: 1.07s	remaining: 5.69s
158:	learn: 2451.9873804	total: 1.07s	remaining: 5.67s
159:	learn: 2450.8561954	total: 1.08s	remaining: 5.69s
160:	learn: 2449.7476635	total: 1.09s	remaining: 5.68s
161:	learn: 2446.4227836	total: 1.09s	remaining: 5.67s
162:	learn: 2444.2555123	total: 1.1s	remaining: 5.65s
163:	learn: 2441.5688551	total: 1.1s	remaining: 5.63s
164:	learn: 2440.2986221	total: 1.11s	remaining: 5.61s
165:	learn: 2437.3232663	total: 1.11s	remaining: 5.59s
166:	learn: 2432.4649721	total: 1.11s	remaining: 5.57s
167:	learn: 2428.3970878	total: 1.12s	remaining: 5.55s
168:	learn: 2423.6253903	total: 1.13s	remaining: 5.54s
169:	learn: 2421.4159298	total: 1.14s	remaining: 5.57s
170:	learn: 2420.5105888	total: 1.15s	remaining: 5.55s
171:	learn: 2419.1046943	total: 1.15s	remaining: 5.54s
172:	learn: 2415.9697367	total: 1.16s	remaining: 5.53s
173:	learn: 2414.6048787	total: 1.16s	remaining: 5.51s
174:	learn: 2413.5476062	total: 1.17s	remaining: 5.5s
175:	learn: 2410.0292711	total: 1.17s	remaining: 5.5s
176:	learn: 2409.2012840	total: 1.18s	remaining: 5.49s
177:	learn: 2404.9199084	total: 1.19s	remaining: 5.47s
178:	learn: 2403.6642991	total: 1.19s	remaining: 5.46s
179:	learn: 2399.8983815	total: 1.2s	remaining: 5.45s
180:	learn: 2395.0973851	total: 1.2s	remaining: 5.45s
181:	learn: 2393.7417885	total: 1.21s	remaining: 5.43s
182:	learn: 2391.4422590	total: 1.21s	remaining: 5.42s
183:	learn: 2388.2745020	total: 1.22s	remaining: 5.41s
184:	learn: 2385.2843416	total: 1.23s	remaining: 5.4s
185:	learn: 2384.0606488	total: 1.23s	remaining: 5.39s
186:	learn: 2381.7253049	total: 1.24s	remaining: 5.38s
187:	learn: 2380.6296772	total: 1.24s	remaining: 5.37s
188:	learn: 2377.6691116	total: 1.25s	remaining: 5.35s
189:	learn: 2376.6098548	total: 1.25s	remaining: 5.33s
190:	learn: 2373.3608150	total: 1.25s	remaining: 5.32s
191:	learn: 2371.9139000	total: 1.26s	remaining: 5.3s
192:	learn: 2369.5546632	total: 1.26s	remaining: 5.28s
193:	learn: 2366.3459170	total: 1.27s	remaining: 5.26s
194:	learn: 2362.9178905	total: 1.27s	remaining: 5.25s
195:	learn: 2360.1859250	total: 1.28s	remaining: 5.24s
196:	learn: 2359.2590139	total: 1.28s	remaining: 5.22s
197:	learn: 2357.0284087	total: 1.29s	remaining: 5.21s
198:	learn: 2353.8146557	total: 1.29s	remaining: 5.19s
199:	learn: 2353.1600869	total: 1.29s	remaining: 5.17s
200:	learn: 2350.2201701	total: 1.3s	remaining: 5.16s
201:	learn: 2347.3400692	total: 1.3s	remaining: 5.15s
202:	learn: 2345.5937801	total: 1.31s	remaining: 5.14s
203:	learn: 2344.6418606	total: 1.31s	remaining: 5.12s
204:	learn: 2343.8773572	total: 1.31s	remaining: 5.1s
205:	learn: 2342.6759718	total: 1.32s	remaining: 5.08s
206:	learn: 2339.8990721	total: 1.32s	remaining: 5.06s
207:	learn: 2338.2296743	total: 1.33s	remaining: 5.05s
208:	learn: 2336.5341093	total: 1.33s	remaining: 5.04s
209:	learn: 2335.3146229	total: 1.34s	remaining: 5.03s
210:	learn: 2333.2988328	total: 1.34s	remaining: 5.01s
211:	learn: 2331.4510136	total: 1.35s	remaining: 5s
212:	learn: 2330.1170916	total: 1.35s	remaining: 5s
213:	learn: 2329.1335953	total: 1.36s	remaining: 4.99s
214:	learn: 2324.7341247	total: 1.37s	remaining: 5s
215:	learn: 2323.8905796	total: 1.37s	remaining: 4.99s
216:	learn: 2322.9088141	total: 1.38s	remaining: 4.97s
217:	learn: 2321.4407108	total: 1.38s	remaining: 4.97s
218:	learn: 2319.8959026	total: 1.39s	remaining: 4.96s
219:	learn: 2317.2280690	total: 1.39s	remaining: 4.95s
220:	learn: 2316.4073503	total: 1.4s	remaining: 4.94s
221:	learn: 2315.9774014	total: 1.41s	remaining: 4.93s
222:	learn: 2313.4353035	total: 1.41s	remaining: 4.92s
223:	learn: 2309.5002690	total: 1.42s	remaining: 4.92s
224:	learn: 2308.6886023	total: 1.42s	remaining: 4.91s
225:	learn: 2304.9689724	total: 1.43s	remaining: 4.9s
226:	learn: 2303.2677276	total: 1.43s	remaining: 4.89s
227:	learn: 2302.0813332	total: 1.44s	remaining: 4.88s
228:	learn: 2299.7896009	total: 1.45s	remaining: 4.88s
229:	learn: 2298.6975801	total: 1.45s	remaining: 4.87s
230:	learn: 2295.4513799	total: 1.46s	remaining: 4.86s
231:	learn: 2294.8327851	total: 1.47s	remaining: 4.85s
232:	learn: 2292.7337022	total: 1.47s	remaining: 4.84s
233:	learn: 2291.3686155	total: 1.47s	remaining: 4.82s
234:	learn: 2289.8722686	total: 1.48s	remaining: 4.82s
235:	learn: 2287.1903095	total: 1.48s	remaining: 4.8s
236:	learn: 2285.1795624	total: 1.49s	remaining: 4.8s
237:	learn: 2284.5492745	total: 1.5s	remaining: 4.79s
238:	learn: 2284.0598710	total: 1.5s	remaining: 4.78s
239:	learn: 2280.8355280	total: 1.51s	remaining: 4.77s
240:	learn: 2277.6796816	total: 1.51s	remaining: 4.77s
241:	learn: 2276.4744729	total: 1.52s	remaining: 4.76s
242:	learn: 2274.1066185	total: 1.52s	remaining: 4.75s
243:	learn: 2273.2995006	total: 1.53s	remaining: 4.74s
244:	learn: 2271.2913458	total: 1.54s	remaining: 4.74s
245:	learn: 2267.8707432	total: 1.54s	remaining: 4.74s
246:	learn: 2266.7160845	total: 1.55s	remaining: 4.73s
247:	learn: 2265.3929145	total: 1.56s	remaining: 4.73s
248:	learn: 2262.4179426	total: 1.57s	remaining: 4.73s
249:	learn: 2261.1526174	total: 1.58s	remaining: 4.74s
250:	learn: 2259.8092162	total: 1.59s	remaining: 4.74s
251:	learn: 2258.6097665	total: 1.59s	remaining: 4.72s
252:	learn: 2258.0384687	total: 1.6s	remaining: 4.71s
253:	learn: 2256.7364390	total: 1.61s	remaining: 4.72s
254:	learn: 2254.4523140	total: 1.62s	remaining: 4.72s
255:	learn: 2252.2397492	total: 1.62s	remaining: 4.71s
256:	learn: 2251.4537345	total: 1.63s	remaining: 4.7s
257:	learn: 2250.3081272	total: 1.63s	remaining: 4.69s
258:	learn: 2249.3424881	total: 1.63s	remaining: 4.68s
259:	learn: 2248.7847665	total: 1.64s	remaining: 4.67s
260:	learn: 2246.8981136	total: 1.65s	remaining: 4.66s
261:	learn: 2244.2123674	total: 1.65s	remaining: 4.65s
262:	learn: 2242.6369237	total: 1.66s	remaining: 4.64s
263:	learn: 2241.6767611	total: 1.66s	remaining: 4.63s
264:	learn: 2241.2817960	total: 1.66s	remaining: 4.61s
265:	learn: 2238.2773409	total: 1.67s	remaining: 4.6s
266:	learn: 2236.9083606	total: 1.67s	remaining: 4.6s
267:	learn: 2236.5013125	total: 1.68s	remaining: 4.59s
268:	learn: 2235.3727747	total: 1.69s	remaining: 4.58s
269:	learn: 2233.4085769	total: 1.69s	remaining: 4.57s
270:	learn: 2231.8458786	total: 1.7s	remaining: 4.56s
271:	learn: 2231.2707133	total: 1.7s	remaining: 4.55s
272:	learn: 2230.1912489	total: 1.71s	remaining: 4.55s
273:	learn: 2227.2714177	total: 1.71s	remaining: 4.54s
274:	learn: 2226.7498899	total: 1.72s	remaining: 4.53s
275:	learn: 2226.3826174	total: 1.73s	remaining: 4.53s
276:	learn: 2225.1456520	total: 1.73s	remaining: 4.52s
277:	learn: 2224.6893392	total: 1.74s	remaining: 4.51s
278:	learn: 2222.7238604	total: 1.75s	remaining: 4.51s
279:	learn: 2220.9794437	total: 1.75s	remaining: 4.5s
280:	learn: 2219.2772746	total: 1.76s	remaining: 4.5s
281:	learn: 2217.4638789	total: 1.76s	remaining: 4.49s
282:	learn: 2215.2710212	total: 1.77s	remaining: 4.49s
283:	learn: 2213.8374478	total: 1.78s	remaining: 4.48s
284:	learn: 2213.4277674	total: 1.78s	remaining: 4.47s
285:	learn: 2211.3747549	total: 1.79s	remaining: 4.46s
286:	learn: 2210.9939500	total: 1.79s	remaining: 4.46s
287:	learn: 2210.4672871	total: 1.8s	remaining: 4.45s
288:	learn: 2208.0419719	total: 1.8s	remaining: 4.44s
289:	learn: 2205.6687214	total: 1.81s	remaining: 4.43s
290:	learn: 2204.0098133	total: 1.82s	remaining: 4.43s
291:	learn: 2203.6478036	total: 1.82s	remaining: 4.42s
292:	learn: 2202.4326835	total: 1.83s	remaining: 4.41s
293:	learn: 2200.6478481	total: 1.83s	remaining: 4.4s
294:	learn: 2199.3330273	total: 1.84s	remaining: 4.39s
295:	learn: 2198.1228945	total: 1.84s	remaining: 4.38s
296:	learn: 2195.7625772	total: 1.85s	remaining: 4.37s
297:	learn: 2195.3184198	total: 1.86s	remaining: 4.39s
298:	learn: 2195.0797540	total: 1.87s	remaining: 4.38s
299:	learn: 2194.0416372	total: 1.87s	remaining: 4.37s
300:	learn: 2193.2579818	total: 1.88s	remaining: 4.36s
301:	learn: 2192.0421561	total: 1.89s	remaining: 4.36s
302:	learn: 2189.8916533	total: 1.89s	remaining: 4.35s
303:	learn: 2187.6162775	total: 1.9s	remaining: 4.34s
304:	learn: 2186.5523068	total: 1.9s	remaining: 4.33s
305:	learn: 2185.0648494	total: 1.91s	remaining: 4.33s
306:	learn: 2183.0338672	total: 1.91s	remaining: 4.32s
307:	learn: 2182.3582529	total: 1.92s	remaining: 4.31s
308:	learn: 2181.2600131	total: 1.92s	remaining: 4.3s
309:	learn: 2179.2779872	total: 1.93s	remaining: 4.3s
310:	learn: 2177.3968483	total: 1.94s	remaining: 4.29s
311:	learn: 2175.6936531	total: 1.94s	remaining: 4.29s
312:	learn: 2175.1346380	total: 1.95s	remaining: 4.28s
313:	learn: 2174.3948443	total: 1.96s	remaining: 4.27s
314:	learn: 2174.0686101	total: 1.96s	remaining: 4.26s
315:	learn: 2173.5648035	total: 1.97s	remaining: 4.26s
316:	learn: 2172.1935105	total: 1.97s	remaining: 4.25s
317:	learn: 2170.5700401	total: 1.98s	remaining: 4.25s
318:	learn: 2168.7470746	total: 1.99s	remaining: 4.24s
319:	learn: 2167.5437761	total: 1.99s	remaining: 4.23s
320:	learn: 2165.5790789	total: 2s	remaining: 4.22s
321:	learn: 2164.2609405	total: 2s	remaining: 4.22s
322:	learn: 2162.5119577	total: 2.01s	remaining: 4.21s
323:	learn: 2161.4811771	total: 2.02s	remaining: 4.21s
324:	learn: 2160.5458029	total: 2.02s	remaining: 4.2s
325:	learn: 2158.9245109	total: 2.03s	remaining: 4.19s
326:	learn: 2157.0096074	total: 2.04s	remaining: 4.19s
327:	learn: 2156.0953247	total: 2.04s	remaining: 4.18s
328:	learn: 2155.2997990	total: 2.05s	remaining: 4.18s
329:	learn: 2154.0676384	total: 2.05s	remaining: 4.17s
330:	learn: 2152.1472733	total: 2.06s	remaining: 4.17s
331:	learn: 2151.6773078	total: 2.07s	remaining: 4.16s
332:	learn: 2151.1380063	total: 2.07s	remaining: 4.15s
333:	learn: 2149.9980622	total: 2.08s	remaining: 4.14s
334:	learn: 2148.6561715	total: 2.08s	remaining: 4.14s
335:	learn: 2147.5979814	total: 2.09s	remaining: 4.13s
336:	learn: 2146.4366203	total: 2.1s	remaining: 4.14s
337:	learn: 2145.3221621	total: 2.11s	remaining: 4.14s
338:	learn: 2144.1606578	total: 2.12s	remaining: 4.13s
339:	learn: 2143.4114760	total: 2.12s	remaining: 4.12s
340:	learn: 2141.9134923	total: 2.13s	remaining: 4.11s
341:	learn: 2141.5027453	total: 2.13s	remaining: 4.1s
342:	learn: 2140.5973868	total: 2.14s	remaining: 4.1s
343:	learn: 2140.1711968	total: 2.14s	remaining: 4.09s
344:	learn: 2139.1309802	total: 2.15s	remaining: 4.08s
345:	learn: 2138.1651205	total: 2.15s	remaining: 4.07s
346:	learn: 2136.6793748	total: 2.16s	remaining: 4.07s
347:	learn: 2134.9271288	total: 2.17s	remaining: 4.06s
348:	learn: 2133.4990990	total: 2.17s	remaining: 4.05s
349:	learn: 2132.9579709	total: 2.18s	remaining: 4.04s
350:	learn: 2131.3568877	total: 2.18s	remaining: 4.03s
351:	learn: 2130.0934606	total: 2.19s	remaining: 4.03s
352:	learn: 2129.1291428	total: 2.19s	remaining: 4.01s
353:	learn: 2127.2119683	total: 2.19s	remaining: 4s
354:	learn: 2126.2466287	total: 2.2s	remaining: 4s
355:	learn: 2125.9083128	total: 2.21s	remaining: 3.99s
356:	learn: 2125.6451110	total: 2.21s	remaining: 3.98s
357:	learn: 2123.9260800	total: 2.21s	remaining: 3.97s
358:	learn: 2122.2698937	total: 2.22s	remaining: 3.96s
359:	learn: 2120.6817881	total: 2.22s	remaining: 3.95s
360:	learn: 2120.4331669	total: 2.23s	remaining: 3.94s
361:	learn: 2118.9594319	total: 2.23s	remaining: 3.93s
362:	learn: 2118.1315547	total: 2.23s	remaining: 3.92s
363:	learn: 2116.6849988	total: 2.24s	remaining: 3.91s
364:	learn: 2115.7430720	total: 2.24s	remaining: 3.9s
365:	learn: 2115.4083834	total: 2.25s	remaining: 3.89s
366:	learn: 2114.4777345	total: 2.25s	remaining: 3.88s
367:	learn: 2113.1287812	total: 2.25s	remaining: 3.87s
368:	learn: 2112.0570443	total: 2.26s	remaining: 3.86s
369:	learn: 2111.4712932	total: 2.26s	remaining: 3.85s
370:	learn: 2109.7901911	total: 2.27s	remaining: 3.85s
371:	learn: 2108.9720656	total: 2.27s	remaining: 3.84s
372:	learn: 2108.0837174	total: 2.28s	remaining: 3.83s
373:	learn: 2107.4173722	total: 2.29s	remaining: 3.83s
374:	learn: 2105.9077041	total: 2.29s	remaining: 3.82s
375:	learn: 2105.0434116	total: 2.29s	remaining: 3.81s
376:	learn: 2104.1928470	total: 2.3s	remaining: 3.8s
377:	learn: 2102.8518835	total: 2.3s	remaining: 3.79s
378:	learn: 2102.3280049	total: 2.31s	remaining: 3.78s
379:	learn: 2101.5647406	total: 2.31s	remaining: 3.77s
380:	learn: 2099.8922332	total: 2.32s	remaining: 3.76s
381:	learn: 2099.3944989	total: 2.32s	remaining: 3.76s
382:	learn: 2097.8868122	total: 2.33s	remaining: 3.75s
383:	learn: 2097.1129248	total: 2.34s	remaining: 3.75s
384:	learn: 2096.5131524	total: 2.34s	remaining: 3.74s
385:	learn: 2095.6390710	total: 2.35s	remaining: 3.73s
386:	learn: 2094.9377428	total: 2.35s	remaining: 3.73s
387:	learn: 2093.8320801	total: 2.36s	remaining: 3.72s
388:	learn: 2092.9831235	total: 2.36s	remaining: 3.71s
389:	learn: 2092.4251574	total: 2.37s	remaining: 3.7s
390:	learn: 2092.0229687	total: 2.37s	remaining: 3.7s
391:	learn: 2091.0829485	total: 2.38s	remaining: 3.69s
392:	learn: 2090.3250834	total: 2.38s	remaining: 3.68s
393:	learn: 2088.1079012	total: 2.39s	remaining: 3.68s
394:	learn: 2087.8986596	total: 2.4s	remaining: 3.67s
395:	learn: 2086.9032005	total: 2.4s	remaining: 3.66s
396:	learn: 2086.4395223	total: 2.41s	remaining: 3.65s
397:	learn: 2085.7447370	total: 2.41s	remaining: 3.65s
398:	learn: 2084.8575496	total: 2.42s	remaining: 3.64s
399:	learn: 2083.7415256	total: 2.42s	remaining: 3.63s
400:	learn: 2083.0369860	total: 2.43s	remaining: 3.63s
401:	learn: 2082.6884436	total: 2.43s	remaining: 3.62s
402:	learn: 2081.7538694	total: 2.44s	remaining: 3.61s
403:	learn: 2081.2170399	total: 2.44s	remaining: 3.6s
404:	learn: 2080.6131672	total: 2.45s	remaining: 3.59s
405:	learn: 2080.2306545	total: 2.45s	remaining: 3.58s
406:	learn: 2078.8843389	total: 2.45s	remaining: 3.58s
407:	learn: 2078.3923753	total: 2.46s	remaining: 3.57s
408:	learn: 2077.1229855	total: 2.46s	remaining: 3.56s
409:	learn: 2076.8295811	total: 2.47s	remaining: 3.55s
410:	learn: 2076.3552737	total: 2.47s	remaining: 3.54s
411:	learn: 2075.3314118	total: 2.48s	remaining: 3.54s
412:	learn: 2074.3620299	total: 2.48s	remaining: 3.53s
413:	learn: 2073.7485263	total: 2.49s	remaining: 3.52s
414:	learn: 2073.2672776	total: 2.49s	remaining: 3.52s
415:	learn: 2072.5119144	total: 2.5s	remaining: 3.51s
416:	learn: 2071.2966911	total: 2.5s	remaining: 3.5s
417:	learn: 2070.9719841	total: 2.51s	remaining: 3.49s
418:	learn: 2069.6649051	total: 2.51s	remaining: 3.49s
419:	learn: 2069.2530742	total: 2.52s	remaining: 3.48s
420:	learn: 2067.8643085	total: 2.52s	remaining: 3.47s
421:	learn: 2067.1527675	total: 2.53s	remaining: 3.46s
422:	learn: 2066.9025605	total: 2.53s	remaining: 3.45s
423:	learn: 2065.6107786	total: 2.54s	remaining: 3.45s
424:	learn: 2064.5494343	total: 2.54s	remaining: 3.44s
425:	learn: 2064.1986889	total: 2.55s	remaining: 3.43s
426:	learn: 2063.4126644	total: 2.55s	remaining: 3.42s
427:	learn: 2062.3657353	total: 2.56s	remaining: 3.42s
428:	learn: 2061.1856541	total: 2.56s	remaining: 3.41s
429:	learn: 2060.0766706	total: 2.56s	remaining: 3.4s
430:	learn: 2059.6011787	total: 2.58s	remaining: 3.4s
431:	learn: 2058.6919478	total: 2.58s	remaining: 3.39s
432:	learn: 2058.1497193	total: 2.58s	remaining: 3.38s
433:	learn: 2057.7045388	total: 2.58s	remaining: 3.37s
434:	learn: 2056.5844582	total: 2.59s	remaining: 3.36s
435:	learn: 2056.2417563	total: 2.59s	remaining: 3.35s
436:	learn: 2055.0813455	total: 2.6s	remaining: 3.35s
437:	learn: 2053.8201515	total: 2.6s	remaining: 3.34s
438:	learn: 2053.1830142	total: 2.6s	remaining: 3.33s
439:	learn: 2052.8203145	total: 2.61s	remaining: 3.32s
440:	learn: 2052.0275189	total: 2.61s	remaining: 3.31s
441:	learn: 2050.9832104	total: 2.62s	remaining: 3.3s
442:	learn: 2050.5939400	total: 2.62s	remaining: 3.29s
443:	learn: 2049.6303754	total: 2.62s	remaining: 3.28s
444:	learn: 2049.2536153	total: 2.63s	remaining: 3.28s
445:	learn: 2048.6044518	total: 2.63s	remaining: 3.27s
446:	learn: 2047.9174875	total: 2.64s	remaining: 3.27s
447:	learn: 2047.1583329	total: 2.64s	remaining: 3.26s
448:	learn: 2046.9824522	total: 2.65s	remaining: 3.25s
449:	learn: 2046.3691624	total: 2.65s	remaining: 3.24s
450:	learn: 2045.4732471	total: 2.66s	remaining: 3.23s
451:	learn: 2044.6858476	total: 2.66s	remaining: 3.23s
452:	learn: 2043.5823245	total: 2.67s	remaining: 3.22s
453:	learn: 2042.6867647	total: 2.67s	remaining: 3.21s
454:	learn: 2042.3752872	total: 2.67s	remaining: 3.2s
455:	learn: 2041.1571101	total: 2.68s	remaining: 3.19s
456:	learn: 2039.8882417	total: 2.68s	remaining: 3.18s
457:	learn: 2038.8534851	total: 2.69s	remaining: 3.18s
458:	learn: 2037.9920866	total: 2.69s	remaining: 3.17s
459:	learn: 2037.4303184	total: 2.69s	remaining: 3.16s
460:	learn: 2036.6993182	total: 2.7s	remaining: 3.16s
461:	learn: 2035.9009810	total: 2.71s	remaining: 3.15s
462:	learn: 2035.0864239	total: 2.71s	remaining: 3.14s
463:	learn: 2034.3824600	total: 2.71s	remaining: 3.13s
464:	learn: 2034.1993500	total: 2.72s	remaining: 3.13s
465:	learn: 2033.3609059	total: 2.72s	remaining: 3.12s
466:	learn: 2032.9821501	total: 2.73s	remaining: 3.11s
467:	learn: 2032.5482135	total: 2.73s	remaining: 3.1s
468:	learn: 2032.1024379	total: 2.74s	remaining: 3.1s
469:	learn: 2031.6525294	total: 2.74s	remaining: 3.09s
470:	learn: 2031.1200697	total: 2.75s	remaining: 3.08s
471:	learn: 2030.4309641	total: 2.75s	remaining: 3.08s
472:	learn: 2029.9703506	total: 2.76s	remaining: 3.07s
473:	learn: 2029.1619608	total: 2.76s	remaining: 3.06s
474:	learn: 2028.3173172	total: 2.77s	remaining: 3.06s
475:	learn: 2027.4450816	total: 2.77s	remaining: 3.05s
476:	learn: 2026.4852248	total: 2.78s	remaining: 3.05s
477:	learn: 2025.7125620	total: 2.78s	remaining: 3.04s
478:	learn: 2024.4385398	total: 2.79s	remaining: 3.03s
479:	learn: 2023.7573534	total: 2.79s	remaining: 3.03s
480:	learn: 2022.9267016	total: 2.8s	remaining: 3.02s
481:	learn: 2022.5285079	total: 2.81s	remaining: 3.02s
482:	learn: 2021.6201313	total: 2.82s	remaining: 3.02s
483:	learn: 2020.6956057	total: 2.83s	remaining: 3.01s
484:	learn: 2019.6415235	total: 2.83s	remaining: 3.01s
485:	learn: 2019.2660694	total: 2.84s	remaining: 3s
486:	learn: 2018.9854237	total: 2.84s	remaining: 3s
487:	learn: 2017.8809698	total: 2.85s	remaining: 2.99s
488:	learn: 2016.5544473	total: 2.85s	remaining: 2.98s
489:	learn: 2016.2635297	total: 2.86s	remaining: 2.98s
490:	learn: 2015.9216242	total: 2.87s	remaining: 2.97s
491:	learn: 2015.3919735	total: 2.87s	remaining: 2.97s
492:	learn: 2014.7855567	total: 2.88s	remaining: 2.96s
493:	learn: 2014.4619330	total: 2.89s	remaining: 2.96s
494:	learn: 2013.9176550	total: 2.89s	remaining: 2.95s
495:	learn: 2013.6272291	total: 2.9s	remaining: 2.94s
496:	learn: 2012.8434483	total: 2.9s	remaining: 2.94s
497:	learn: 2012.1226817	total: 2.91s	remaining: 2.94s
498:	learn: 2011.3481377	total: 2.92s	remaining: 2.93s
499:	learn: 2010.4012031	total: 2.92s	remaining: 2.92s
500:	learn: 2009.4223510	total: 2.93s	remaining: 2.92s
501:	learn: 2007.9642496	total: 2.93s	remaining: 2.91s
502:	learn: 2006.6061373	total: 2.94s	remaining: 2.91s
503:	learn: 2006.3496774	total: 2.95s	remaining: 2.9s
504:	learn: 2005.8526902	total: 2.96s	remaining: 2.9s
505:	learn: 2004.7208606	total: 2.96s	remaining: 2.89s
506:	learn: 2003.4356822	total: 2.97s	remaining: 2.89s
507:	learn: 2002.9549950	total: 2.98s	remaining: 2.88s
508:	learn: 2002.2660866	total: 2.98s	remaining: 2.88s
509:	learn: 2001.7655635	total: 2.99s	remaining: 2.87s
510:	learn: 2001.4857925	total: 2.99s	remaining: 2.86s
511:	learn: 2000.8821102	total: 3s	remaining: 2.86s
512:	learn: 2000.1964240	total: 3.01s	remaining: 2.86s
513:	learn: 1999.7786055	total: 3.02s	remaining: 2.85s
514:	learn: 1998.5400249	total: 3.02s	remaining: 2.85s
515:	learn: 1998.0114463	total: 3.03s	remaining: 2.84s
516:	learn: 1996.9762909	total: 3.03s	remaining: 2.83s
517:	learn: 1995.6335126	total: 3.04s	remaining: 2.83s
518:	learn: 1995.3818569	total: 3.04s	remaining: 2.82s
519:	learn: 1994.6836261	total: 3.05s	remaining: 2.82s
520:	learn: 1993.7077334	total: 3.07s	remaining: 2.82s
521:	learn: 1993.4826742	total: 3.08s	remaining: 2.82s
522:	learn: 1992.4700473	total: 3.08s	remaining: 2.81s
523:	learn: 1991.3567304	total: 3.09s	remaining: 2.81s
524:	learn: 1990.6762970	total: 3.09s	remaining: 2.8s
525:	learn: 1989.5606652	total: 3.1s	remaining: 2.79s
526:	learn: 1988.7483715	total: 3.1s	remaining: 2.79s
527:	learn: 1987.7319439	total: 3.11s	remaining: 2.78s
528:	learn: 1987.4859471	total: 3.12s	remaining: 2.77s
529:	learn: 1986.5776344	total: 3.12s	remaining: 2.77s
530:	learn: 1985.8532232	total: 3.13s	remaining: 2.76s
531:	learn: 1985.2637417	total: 3.13s	remaining: 2.76s
532:	learn: 1984.6465808	total: 3.14s	remaining: 2.75s
533:	learn: 1984.0405309	total: 3.14s	remaining: 2.74s
534:	learn: 1983.5475987	total: 3.15s	remaining: 2.73s
535:	learn: 1982.5973042	total: 3.15s	remaining: 2.73s
536:	learn: 1981.9642603	total: 3.16s	remaining: 2.72s
537:	learn: 1981.1852376	total: 3.16s	remaining: 2.72s
538:	learn: 1980.4182747	total: 3.17s	remaining: 2.71s
539:	learn: 1978.7333318	total: 3.18s	remaining: 2.71s
540:	learn: 1977.6620993	total: 3.18s	remaining: 2.7s
541:	learn: 1976.1489712	total: 3.19s	remaining: 2.69s
542:	learn: 1976.0680284	total: 3.19s	remaining: 2.69s
543:	learn: 1975.6461332	total: 3.2s	remaining: 2.68s
544:	learn: 1974.9214418	total: 3.21s	remaining: 2.68s
545:	learn: 1973.9625504	total: 3.21s	remaining: 2.67s
546:	learn: 1973.3496809	total: 3.22s	remaining: 2.66s
547:	learn: 1973.1396807	total: 3.22s	remaining: 2.66s
548:	learn: 1972.5971978	total: 3.23s	remaining: 2.65s
549:	learn: 1971.8903485	total: 3.23s	remaining: 2.65s
550:	learn: 1970.7768482	total: 3.24s	remaining: 2.64s
551:	learn: 1970.1405185	total: 3.25s	remaining: 2.63s
552:	learn: 1969.7304828	total: 3.25s	remaining: 2.63s
553:	learn: 1969.3697204	total: 3.26s	remaining: 2.62s
554:	learn: 1968.8312174	total: 3.27s	remaining: 2.62s
555:	learn: 1967.7808421	total: 3.27s	remaining: 2.61s
556:	learn: 1967.4342981	total: 3.28s	remaining: 2.61s
557:	learn: 1966.9093022	total: 3.28s	remaining: 2.6s
558:	learn: 1966.4605710	total: 3.29s	remaining: 2.6s
559:	learn: 1966.2404654	total: 3.3s	remaining: 2.59s
560:	learn: 1966.0851063	total: 3.31s	remaining: 2.59s
561:	learn: 1965.0358799	total: 3.31s	remaining: 2.58s
562:	learn: 1964.3943368	total: 3.32s	remaining: 2.58s
563:	learn: 1964.1858422	total: 3.32s	remaining: 2.57s
564:	learn: 1963.5435064	total: 3.33s	remaining: 2.56s
565:	learn: 1962.6070521	total: 3.33s	remaining: 2.56s
566:	learn: 1962.0599152	total: 3.34s	remaining: 2.55s
567:	learn: 1961.5265994	total: 3.34s	remaining: 2.54s
568:	learn: 1961.1122149	total: 3.35s	remaining: 2.54s
569:	learn: 1960.8238389	total: 3.35s	remaining: 2.53s
570:	learn: 1960.3653494	total: 3.35s	remaining: 2.52s
571:	learn: 1960.0073179	total: 3.36s	remaining: 2.51s
572:	learn: 1959.5008133	total: 3.36s	remaining: 2.51s
573:	learn: 1959.2899951	total: 3.37s	remaining: 2.5s
574:	learn: 1958.6013168	total: 3.37s	remaining: 2.49s
575:	learn: 1957.6812709	total: 3.37s	remaining: 2.48s
576:	learn: 1957.4891319	total: 3.38s	remaining: 2.48s
577:	learn: 1957.2146640	total: 3.38s	remaining: 2.47s
578:	learn: 1956.7715926	total: 3.39s	remaining: 2.46s
579:	learn: 1956.0808357	total: 3.4s	remaining: 2.46s
580:	learn: 1955.3695098	total: 3.4s	remaining: 2.45s
581:	learn: 1955.2132138	total: 3.4s	remaining: 2.44s
582:	learn: 1954.8613484	total: 3.41s	remaining: 2.44s
583:	learn: 1954.4069408	total: 3.41s	remaining: 2.43s
584:	learn: 1953.8397475	total: 3.42s	remaining: 2.42s
585:	learn: 1953.1768933	total: 3.42s	remaining: 2.42s
586:	learn: 1952.4782126	total: 3.42s	remaining: 2.41s
587:	learn: 1952.0286119	total: 3.43s	remaining: 2.4s
588:	learn: 1950.9369038	total: 3.43s	remaining: 2.4s
589:	learn: 1950.4146139	total: 3.44s	remaining: 2.39s
590:	learn: 1950.1282394	total: 3.44s	remaining: 2.38s
591:	learn: 1949.7933365	total: 3.44s	remaining: 2.37s
592:	learn: 1949.0080405	total: 3.45s	remaining: 2.37s
593:	learn: 1948.2223420	total: 3.46s	remaining: 2.36s
594:	learn: 1947.9928856	total: 3.46s	remaining: 2.35s
595:	learn: 1947.2646529	total: 3.47s	remaining: 2.35s
596:	learn: 1946.8385769	total: 3.47s	remaining: 2.34s
597:	learn: 1946.3798361	total: 3.48s	remaining: 2.34s
598:	learn: 1945.9209756	total: 3.48s	remaining: 2.33s
599:	learn: 1945.4277347	total: 3.49s	remaining: 2.33s
600:	learn: 1944.8442789	total: 3.5s	remaining: 2.32s
601:	learn: 1944.6315215	total: 3.5s	remaining: 2.31s
602:	learn: 1944.1470225	total: 3.5s	remaining: 2.31s
603:	learn: 1943.4117957	total: 3.51s	remaining: 2.3s
604:	learn: 1942.7604691	total: 3.51s	remaining: 2.29s
605:	learn: 1941.9747941	total: 3.52s	remaining: 2.29s
606:	learn: 1941.1599281	total: 3.52s	remaining: 2.28s
607:	learn: 1940.7563392	total: 3.53s	remaining: 2.27s
608:	learn: 1940.4153862	total: 3.53s	remaining: 2.27s
609:	learn: 1939.6180834	total: 3.55s	remaining: 2.27s
610:	learn: 1939.0908116	total: 3.56s	remaining: 2.26s
611:	learn: 1938.5568661	total: 3.56s	remaining: 2.26s
612:	learn: 1938.1051539	total: 3.57s	remaining: 2.25s
613:	learn: 1937.2394544	total: 3.57s	remaining: 2.25s
614:	learn: 1936.8663732	total: 3.58s	remaining: 2.24s
615:	learn: 1936.4281529	total: 3.58s	remaining: 2.23s
616:	learn: 1936.0937120	total: 3.59s	remaining: 2.23s
617:	learn: 1935.7719810	total: 3.59s	remaining: 2.22s
618:	learn: 1935.2913645	total: 3.6s	remaining: 2.21s
619:	learn: 1934.4904903	total: 3.6s	remaining: 2.21s
620:	learn: 1934.1800849	total: 3.61s	remaining: 2.2s
621:	learn: 1933.4538768	total: 3.62s	remaining: 2.2s
622:	learn: 1933.0993932	total: 3.62s	remaining: 2.19s
623:	learn: 1932.6865162	total: 3.63s	remaining: 2.18s
624:	learn: 1931.9273461	total: 3.63s	remaining: 2.18s
625:	learn: 1931.2822183	total: 3.63s	remaining: 2.17s
626:	learn: 1930.4819312	total: 3.64s	remaining: 2.16s
627:	learn: 1929.8859706	total: 3.64s	remaining: 2.16s
628:	learn: 1929.5041891	total: 3.65s	remaining: 2.15s
629:	learn: 1929.3558970	total: 3.65s	remaining: 2.14s
630:	learn: 1929.1645494	total: 3.65s	remaining: 2.14s
631:	learn: 1928.9195363	total: 3.66s	remaining: 2.13s
632:	learn: 1928.2221433	total: 3.66s	remaining: 2.12s
633:	learn: 1927.9877195	total: 3.67s	remaining: 2.12s
634:	learn: 1927.2350665	total: 3.67s	remaining: 2.11s
635:	learn: 1927.0566618	total: 3.67s	remaining: 2.1s
636:	learn: 1926.4536636	total: 3.68s	remaining: 2.1s
637:	learn: 1925.9713985	total: 3.69s	remaining: 2.09s
638:	learn: 1925.0475491	total: 3.69s	remaining: 2.09s
639:	learn: 1924.7392229	total: 3.7s	remaining: 2.08s
640:	learn: 1924.2557266	total: 3.7s	remaining: 2.07s
641:	learn: 1923.6991232	total: 3.71s	remaining: 2.07s
642:	learn: 1923.1585026	total: 3.72s	remaining: 2.06s
643:	learn: 1922.2830460	total: 3.72s	remaining: 2.06s
644:	learn: 1921.6582178	total: 3.73s	remaining: 2.05s
645:	learn: 1921.4140145	total: 3.74s	remaining: 2.05s
646:	learn: 1920.7957001	total: 3.74s	remaining: 2.04s
647:	learn: 1920.4441658	total: 3.75s	remaining: 2.04s
648:	learn: 1919.3724824	total: 3.76s	remaining: 2.03s
649:	learn: 1919.1538698	total: 3.76s	remaining: 2.02s
650:	learn: 1918.5537974	total: 3.77s	remaining: 2.02s
651:	learn: 1917.9168811	total: 3.77s	remaining: 2.01s
652:	learn: 1917.4812191	total: 3.78s	remaining: 2.01s
653:	learn: 1916.8367083	total: 3.79s	remaining: 2s
654:	learn: 1916.2671496	total: 3.8s	remaining: 2s
655:	learn: 1916.0028790	total: 3.8s	remaining: 1.99s
656:	learn: 1915.4531962	total: 3.81s	remaining: 1.99s
657:	learn: 1914.7265362	total: 3.81s	remaining: 1.98s
658:	learn: 1914.2940974	total: 3.82s	remaining: 1.98s
659:	learn: 1913.9447105	total: 3.83s	remaining: 1.97s
660:	learn: 1913.2531448	total: 3.83s	remaining: 1.97s
661:	learn: 1912.5262362	total: 3.84s	remaining: 1.96s
662:	learn: 1911.8972712	total: 3.84s	remaining: 1.95s
663:	learn: 1911.3302694	total: 3.85s	remaining: 1.95s
664:	learn: 1909.9768217	total: 3.86s	remaining: 1.94s
665:	learn: 1909.3816338	total: 3.86s	remaining: 1.94s
666:	learn: 1909.1329520	total: 3.87s	remaining: 1.93s
667:	learn: 1908.7373833	total: 3.87s	remaining: 1.93s
668:	learn: 1908.1893120	total: 3.88s	remaining: 1.92s
669:	learn: 1907.6071535	total: 3.89s	remaining: 1.92s
670:	learn: 1907.2083059	total: 3.89s	remaining: 1.91s
671:	learn: 1906.7641559	total: 3.9s	remaining: 1.9s
672:	learn: 1906.3853849	total: 3.91s	remaining: 1.9s
673:	learn: 1905.6993526	total: 3.91s	remaining: 1.89s
674:	learn: 1904.7655996	total: 3.92s	remaining: 1.89s
675:	learn: 1904.6211307	total: 3.92s	remaining: 1.88s
676:	learn: 1904.3015767	total: 3.93s	remaining: 1.87s
677:	learn: 1903.8347608	total: 3.94s	remaining: 1.87s
678:	learn: 1903.0930039	total: 3.94s	remaining: 1.86s
679:	learn: 1902.6803103	total: 3.95s	remaining: 1.86s
680:	learn: 1902.4831595	total: 3.95s	remaining: 1.85s
681:	learn: 1902.0651061	total: 3.96s	remaining: 1.85s
682:	learn: 1901.8939651	total: 3.97s	remaining: 1.84s
683:	learn: 1901.5747207	total: 3.97s	remaining: 1.83s
684:	learn: 1901.4702575	total: 3.98s	remaining: 1.83s
685:	learn: 1901.2960615	total: 3.98s	remaining: 1.82s
686:	learn: 1900.9686744	total: 3.99s	remaining: 1.82s
687:	learn: 1900.1783409	total: 4s	remaining: 1.81s
688:	learn: 1898.9674669	total: 4s	remaining: 1.81s
689:	learn: 1898.7505151	total: 4.01s	remaining: 1.8s
690:	learn: 1898.0828143	total: 4.01s	remaining: 1.79s
691:	learn: 1897.8864721	total: 4.03s	remaining: 1.79s
692:	learn: 1897.6494806	total: 4.04s	remaining: 1.79s
693:	learn: 1897.2676098	total: 4.04s	remaining: 1.78s
694:	learn: 1896.9141425	total: 4.05s	remaining: 1.78s
695:	learn: 1896.4950852	total: 4.05s	remaining: 1.77s
696:	learn: 1896.1975518	total: 4.06s	remaining: 1.76s
697:	learn: 1895.7470704	total: 4.07s	remaining: 1.76s
698:	learn: 1895.1638165	total: 4.07s	remaining: 1.75s
699:	learn: 1894.3945945	total: 4.08s	remaining: 1.75s
700:	learn: 1893.8565117	total: 4.08s	remaining: 1.74s
701:	learn: 1893.2077595	total: 4.09s	remaining: 1.74s
702:	learn: 1892.9506973	total: 4.09s	remaining: 1.73s
703:	learn: 1892.0310965	total: 4.1s	remaining: 1.72s
704:	learn: 1891.6653710	total: 4.11s	remaining: 1.72s
705:	learn: 1891.2893392	total: 4.11s	remaining: 1.71s
706:	learn: 1890.8579018	total: 4.12s	remaining: 1.71s
707:	learn: 1890.4723481	total: 4.13s	remaining: 1.7s
708:	learn: 1889.4352253	total: 4.13s	remaining: 1.7s
709:	learn: 1888.9559607	total: 4.14s	remaining: 1.69s
710:	learn: 1888.4409118	total: 4.14s	remaining: 1.68s
711:	learn: 1887.9676716	total: 4.15s	remaining: 1.68s
712:	learn: 1887.7094590	total: 4.16s	remaining: 1.67s
713:	learn: 1887.3936179	total: 4.16s	remaining: 1.67s
714:	learn: 1887.0250097	total: 4.17s	remaining: 1.66s
715:	learn: 1885.9298356	total: 4.17s	remaining: 1.66s
716:	learn: 1885.6146034	total: 4.18s	remaining: 1.65s
717:	learn: 1885.3138020	total: 4.19s	remaining: 1.64s
718:	learn: 1884.7486559	total: 4.19s	remaining: 1.64s
719:	learn: 1884.5278681	total: 4.2s	remaining: 1.63s
720:	learn: 1884.0543104	total: 4.21s	remaining: 1.63s
721:	learn: 1883.7689942	total: 4.21s	remaining: 1.62s
722:	learn: 1883.2359678	total: 4.22s	remaining: 1.61s
723:	learn: 1882.7900670	total: 4.22s	remaining: 1.61s
724:	learn: 1882.0417508	total: 4.23s	remaining: 1.6s
725:	learn: 1881.6338492	total: 4.24s	remaining: 1.6s
726:	learn: 1881.0474060	total: 4.24s	remaining: 1.59s
727:	learn: 1880.5159664	total: 4.25s	remaining: 1.59s
728:	learn: 1879.9577001	total: 4.25s	remaining: 1.58s
729:	learn: 1879.6673509	total: 4.26s	remaining: 1.57s
730:	learn: 1879.4652901	total: 4.28s	remaining: 1.57s
731:	learn: 1878.8784480	total: 4.29s	remaining: 1.57s
732:	learn: 1878.4928173	total: 4.29s	remaining: 1.56s
733:	learn: 1877.7303874	total: 4.3s	remaining: 1.56s
734:	learn: 1877.4778627	total: 4.31s	remaining: 1.55s
735:	learn: 1877.0672062	total: 4.31s	remaining: 1.55s
736:	learn: 1876.7347534	total: 4.31s	remaining: 1.54s
737:	learn: 1876.1374506	total: 4.32s	remaining: 1.53s
738:	learn: 1875.8713518	total: 4.32s	remaining: 1.53s
739:	learn: 1875.1213789	total: 4.32s	remaining: 1.52s
740:	learn: 1874.2548331	total: 4.33s	remaining: 1.51s
741:	learn: 1873.8516541	total: 4.33s	remaining: 1.51s
742:	learn: 1873.2149389	total: 4.33s	remaining: 1.5s
743:	learn: 1872.7431538	total: 4.34s	remaining: 1.49s
744:	learn: 1871.7986992	total: 4.34s	remaining: 1.49s
745:	learn: 1871.6302371	total: 4.35s	remaining: 1.48s
746:	learn: 1870.8660665	total: 4.35s	remaining: 1.47s
747:	learn: 1870.4658899	total: 4.35s	remaining: 1.47s
748:	learn: 1869.6587752	total: 4.36s	remaining: 1.46s
749:	learn: 1869.3390240	total: 4.36s	remaining: 1.45s
750:	learn: 1868.8674037	total: 4.37s	remaining: 1.45s
751:	learn: 1868.3279014	total: 4.37s	remaining: 1.44s
752:	learn: 1868.1685566	total: 4.37s	remaining: 1.43s
753:	learn: 1867.7672232	total: 4.38s	remaining: 1.43s
754:	learn: 1866.9935289	total: 4.38s	remaining: 1.42s
755:	learn: 1866.4377809	total: 4.38s	remaining: 1.42s
756:	learn: 1865.7189083	total: 4.39s	remaining: 1.41s
757:	learn: 1865.3495852	total: 4.39s	remaining: 1.4s
758:	learn: 1864.9523783	total: 4.4s	remaining: 1.4s
759:	learn: 1864.5387007	total: 4.4s	remaining: 1.39s
760:	learn: 1863.9160700	total: 4.4s	remaining: 1.38s
761:	learn: 1863.5789544	total: 4.41s	remaining: 1.38s
762:	learn: 1863.0824206	total: 4.42s	remaining: 1.37s
763:	learn: 1862.7023531	total: 4.42s	remaining: 1.36s
764:	learn: 1862.0206040	total: 4.43s	remaining: 1.36s
765:	learn: 1861.7955833	total: 4.43s	remaining: 1.35s
766:	learn: 1861.4609954	total: 4.44s	remaining: 1.35s
767:	learn: 1861.0050528	total: 4.44s	remaining: 1.34s
768:	learn: 1860.7003573	total: 4.45s	remaining: 1.34s
769:	learn: 1860.3088270	total: 4.45s	remaining: 1.33s
770:	learn: 1859.9081828	total: 4.46s	remaining: 1.32s
771:	learn: 1859.6819268	total: 4.47s	remaining: 1.32s
772:	learn: 1859.0808768	total: 4.47s	remaining: 1.31s
773:	learn: 1858.6331858	total: 4.47s	remaining: 1.31s
774:	learn: 1858.1487309	total: 4.48s	remaining: 1.3s
775:	learn: 1857.7579143	total: 4.48s	remaining: 1.29s
776:	learn: 1857.4257328	total: 4.49s	remaining: 1.29s
777:	learn: 1857.2393679	total: 4.49s	remaining: 1.28s
778:	learn: 1856.4111159	total: 4.49s	remaining: 1.27s
779:	learn: 1855.4749348	total: 4.5s	remaining: 1.27s
780:	learn: 1855.1780536	total: 4.51s	remaining: 1.26s
781:	learn: 1854.8170596	total: 4.52s	remaining: 1.26s
782:	learn: 1854.2767736	total: 4.52s	remaining: 1.25s
783:	learn: 1853.6950934	total: 4.53s	remaining: 1.25s
784:	learn: 1853.2197965	total: 4.54s	remaining: 1.24s
785:	learn: 1852.6827726	total: 4.54s	remaining: 1.24s
786:	learn: 1852.1942178	total: 4.55s	remaining: 1.23s
787:	learn: 1851.6440378	total: 4.55s	remaining: 1.22s
788:	learn: 1851.1398290	total: 4.55s	remaining: 1.22s
789:	learn: 1850.7393305	total: 4.56s	remaining: 1.21s
790:	learn: 1850.3050427	total: 4.56s	remaining: 1.21s
791:	learn: 1849.9361299	total: 4.57s	remaining: 1.2s
792:	learn: 1849.6150211	total: 4.57s	remaining: 1.19s
793:	learn: 1848.9374366	total: 4.57s	remaining: 1.19s
794:	learn: 1848.6186723	total: 4.58s	remaining: 1.18s
795:	learn: 1848.4076063	total: 4.58s	remaining: 1.17s
796:	learn: 1848.0898842	total: 4.59s	remaining: 1.17s
797:	learn: 1847.7880502	total: 4.59s	remaining: 1.16s
798:	learn: 1847.5132685	total: 4.6s	remaining: 1.16s
799:	learn: 1847.3287212	total: 4.6s	remaining: 1.15s
800:	learn: 1846.8019638	total: 4.61s	remaining: 1.14s
801:	learn: 1846.1562363	total: 4.61s	remaining: 1.14s
802:	learn: 1845.4062519	total: 4.62s	remaining: 1.13s
803:	learn: 1844.9209396	total: 4.63s	remaining: 1.13s
804:	learn: 1844.6565360	total: 4.63s	remaining: 1.12s
805:	learn: 1843.9883993	total: 4.64s	remaining: 1.12s
806:	learn: 1843.4736788	total: 4.64s	remaining: 1.11s
807:	learn: 1842.9980443	total: 4.65s	remaining: 1.1s
808:	learn: 1842.7250484	total: 4.66s	remaining: 1.1s
809:	learn: 1841.8599437	total: 4.66s	remaining: 1.09s
810:	learn: 1841.4387698	total: 4.67s	remaining: 1.09s
811:	learn: 1841.0499803	total: 4.67s	remaining: 1.08s
812:	learn: 1840.6389994	total: 4.68s	remaining: 1.08s
813:	learn: 1840.5824346	total: 4.68s	remaining: 1.07s
814:	learn: 1840.0460492	total: 4.69s	remaining: 1.06s
815:	learn: 1839.7358153	total: 4.69s	remaining: 1.06s
816:	learn: 1839.1758207	total: 4.7s	remaining: 1.05s
817:	learn: 1838.7747853	total: 4.71s	remaining: 1.05s
818:	learn: 1838.6663974	total: 4.71s	remaining: 1.04s
819:	learn: 1838.1439184	total: 4.72s	remaining: 1.03s
820:	learn: 1837.8198129	total: 4.72s	remaining: 1.03s
821:	learn: 1836.9727506	total: 4.73s	remaining: 1.02s
822:	learn: 1836.6343852	total: 4.73s	remaining: 1.02s
823:	learn: 1836.2584488	total: 4.74s	remaining: 1.01s
824:	learn: 1836.2507165	total: 4.74s	remaining: 1s
825:	learn: 1835.8576619	total: 4.75s	remaining: 1s
826:	learn: 1835.6178868	total: 4.76s	remaining: 995ms
827:	learn: 1835.4892071	total: 4.76s	remaining: 989ms
828:	learn: 1835.1629321	total: 4.76s	remaining: 983ms
829:	learn: 1834.9497893	total: 4.77s	remaining: 977ms
830:	learn: 1834.5549875	total: 4.77s	remaining: 971ms
831:	learn: 1834.0988549	total: 4.78s	remaining: 965ms
832:	learn: 1833.8189952	total: 4.78s	remaining: 958ms
833:	learn: 1833.3335242	total: 4.79s	remaining: 953ms
834:	learn: 1832.9733766	total: 4.79s	remaining: 946ms
835:	learn: 1832.8071890	total: 4.79s	remaining: 940ms
836:	learn: 1832.5660391	total: 4.8s	remaining: 934ms
837:	learn: 1831.9180126	total: 4.8s	remaining: 928ms
838:	learn: 1831.1675342	total: 4.8s	remaining: 922ms
839:	learn: 1830.7563210	total: 4.81s	remaining: 916ms
840:	learn: 1830.3246842	total: 4.81s	remaining: 910ms
841:	learn: 1830.0657664	total: 4.82s	remaining: 904ms
842:	learn: 1829.4229004	total: 4.82s	remaining: 898ms
843:	learn: 1828.7315034	total: 4.83s	remaining: 892ms
844:	learn: 1828.4512671	total: 4.83s	remaining: 886ms
845:	learn: 1827.8254092	total: 4.83s	remaining: 880ms
846:	learn: 1827.2232798	total: 4.84s	remaining: 874ms
847:	learn: 1826.6298672	total: 4.84s	remaining: 868ms
848:	learn: 1825.9501280	total: 4.84s	remaining: 862ms
849:	learn: 1825.7641712	total: 4.85s	remaining: 855ms
850:	learn: 1825.4470687	total: 4.85s	remaining: 850ms
851:	learn: 1825.1894602	total: 4.86s	remaining: 844ms
852:	learn: 1824.7547814	total: 4.86s	remaining: 838ms
853:	learn: 1824.1378522	total: 4.87s	remaining: 832ms
854:	learn: 1823.3788765	total: 4.87s	remaining: 826ms
855:	learn: 1823.2798198	total: 4.88s	remaining: 821ms
856:	learn: 1822.8942754	total: 4.88s	remaining: 815ms
857:	learn: 1822.6624884	total: 4.89s	remaining: 809ms
858:	learn: 1822.3470225	total: 4.89s	remaining: 803ms
859:	learn: 1822.0552062	total: 4.9s	remaining: 798ms
860:	learn: 1821.4936337	total: 4.91s	remaining: 792ms
861:	learn: 1821.3556329	total: 4.91s	remaining: 786ms
862:	learn: 1821.0457434	total: 4.92s	remaining: 780ms
863:	learn: 1820.3942371	total: 4.92s	remaining: 775ms
864:	learn: 1820.1204935	total: 4.93s	remaining: 769ms
865:	learn: 1819.9671185	total: 4.93s	remaining: 763ms
866:	learn: 1819.8749744	total: 4.94s	remaining: 757ms
867:	learn: 1819.6347894	total: 4.94s	remaining: 751ms
868:	learn: 1819.3199081	total: 4.95s	remaining: 746ms
869:	learn: 1818.9499363	total: 4.95s	remaining: 740ms
870:	learn: 1818.5044571	total: 4.96s	remaining: 734ms
871:	learn: 1817.8638811	total: 4.96s	remaining: 728ms
872:	learn: 1817.4098410	total: 4.96s	remaining: 722ms
873:	learn: 1817.0773847	total: 4.97s	remaining: 716ms
874:	learn: 1816.7364761	total: 4.97s	remaining: 710ms
875:	learn: 1816.4416540	total: 4.97s	remaining: 704ms
876:	learn: 1815.9989090	total: 4.98s	remaining: 698ms
877:	learn: 1815.8309861	total: 4.98s	remaining: 692ms
878:	learn: 1815.0542665	total: 4.99s	remaining: 687ms
879:	learn: 1814.6943249	total: 5s	remaining: 681ms
880:	learn: 1814.2092583	total: 5s	remaining: 676ms
881:	learn: 1813.8651675	total: 5s	remaining: 670ms
882:	learn: 1813.5007452	total: 5.01s	remaining: 664ms
883:	learn: 1813.1900335	total: 5.01s	remaining: 658ms
884:	learn: 1812.7738534	total: 5.02s	remaining: 652ms
885:	learn: 1812.4607602	total: 5.02s	remaining: 646ms
886:	learn: 1811.8562410	total: 5.02s	remaining: 640ms
887:	learn: 1811.5294675	total: 5.03s	remaining: 634ms
888:	learn: 1811.0188588	total: 5.03s	remaining: 628ms
889:	learn: 1810.7853011	total: 5.04s	remaining: 622ms
890:	learn: 1810.2405183	total: 5.04s	remaining: 616ms
891:	learn: 1810.0319371	total: 5.04s	remaining: 611ms
892:	learn: 1809.8140306	total: 5.04s	remaining: 605ms
893:	learn: 1809.2706172	total: 5.05s	remaining: 599ms
894:	learn: 1808.7636476	total: 5.05s	remaining: 593ms
895:	learn: 1808.5795658	total: 5.06s	remaining: 587ms
896:	learn: 1808.2682456	total: 5.06s	remaining: 581ms
897:	learn: 1807.6999513	total: 5.07s	remaining: 575ms
898:	learn: 1807.1011318	total: 5.07s	remaining: 570ms
899:	learn: 1806.9458012	total: 5.07s	remaining: 564ms
900:	learn: 1806.4977533	total: 5.08s	remaining: 558ms
901:	learn: 1805.7089914	total: 5.08s	remaining: 552ms
902:	learn: 1805.3878730	total: 5.08s	remaining: 546ms
903:	learn: 1804.9064780	total: 5.09s	remaining: 540ms
904:	learn: 1804.6975135	total: 5.09s	remaining: 534ms
905:	learn: 1804.2327366	total: 5.09s	remaining: 529ms
906:	learn: 1803.5734829	total: 5.1s	remaining: 523ms
907:	learn: 1803.2696864	total: 5.1s	remaining: 517ms
908:	learn: 1802.8716096	total: 5.11s	remaining: 511ms
909:	learn: 1802.3973925	total: 5.11s	remaining: 505ms
910:	learn: 1802.0958028	total: 5.11s	remaining: 500ms
911:	learn: 1801.2755515	total: 5.12s	remaining: 494ms
912:	learn: 1801.0253090	total: 5.12s	remaining: 488ms
913:	learn: 1800.8272483	total: 5.13s	remaining: 482ms
914:	learn: 1800.3851756	total: 5.13s	remaining: 477ms
915:	learn: 1799.7905522	total: 5.13s	remaining: 471ms
916:	learn: 1799.4320431	total: 5.14s	remaining: 465ms
917:	learn: 1799.2001094	total: 5.14s	remaining: 459ms
918:	learn: 1798.6310586	total: 5.14s	remaining: 454ms
919:	learn: 1797.7435449	total: 5.15s	remaining: 448ms
920:	learn: 1797.5094310	total: 5.15s	remaining: 442ms
921:	learn: 1796.9513802	total: 5.16s	remaining: 436ms
922:	learn: 1796.5800373	total: 5.16s	remaining: 430ms
923:	learn: 1796.2969722	total: 5.16s	remaining: 425ms
924:	learn: 1795.7591360	total: 5.17s	remaining: 419ms
925:	learn: 1795.2822373	total: 5.17s	remaining: 413ms
926:	learn: 1794.8762874	total: 5.17s	remaining: 407ms
927:	learn: 1794.5507209	total: 5.18s	remaining: 402ms
928:	learn: 1794.2487088	total: 5.18s	remaining: 396ms
929:	learn: 1793.8825939	total: 5.18s	remaining: 390ms
930:	learn: 1793.1689341	total: 5.19s	remaining: 385ms
931:	learn: 1792.8099203	total: 5.19s	remaining: 379ms
932:	learn: 1792.6308175	total: 5.2s	remaining: 373ms
933:	learn: 1792.3575632	total: 5.2s	remaining: 368ms
934:	learn: 1791.8779254	total: 5.21s	remaining: 362ms
935:	learn: 1791.6877046	total: 5.21s	remaining: 356ms
936:	learn: 1791.5026893	total: 5.22s	remaining: 351ms
937:	learn: 1791.4013407	total: 5.22s	remaining: 345ms
938:	learn: 1791.1824732	total: 5.22s	remaining: 339ms
939:	learn: 1790.7292647	total: 5.24s	remaining: 334ms
940:	learn: 1790.2089778	total: 5.24s	remaining: 329ms
941:	learn: 1789.6764870	total: 5.25s	remaining: 323ms
942:	learn: 1789.3655891	total: 5.25s	remaining: 317ms
943:	learn: 1788.9692773	total: 5.25s	remaining: 312ms
944:	learn: 1788.4461083	total: 5.26s	remaining: 306ms
945:	learn: 1788.1874135	total: 5.26s	remaining: 300ms
946:	learn: 1787.9922170	total: 5.27s	remaining: 295ms
947:	learn: 1787.4156234	total: 5.27s	remaining: 289ms
948:	learn: 1787.0741383	total: 5.28s	remaining: 284ms
949:	learn: 1786.6421431	total: 5.28s	remaining: 278ms
950:	learn: 1786.4084330	total: 5.29s	remaining: 272ms
951:	learn: 1786.2056633	total: 5.29s	remaining: 267ms
952:	learn: 1785.9851520	total: 5.29s	remaining: 261ms
953:	learn: 1785.7392602	total: 5.3s	remaining: 255ms
954:	learn: 1785.3149446	total: 5.3s	remaining: 250ms
955:	learn: 1785.1144115	total: 5.31s	remaining: 244ms
956:	learn: 1784.7228658	total: 5.31s	remaining: 239ms
957:	learn: 1784.5047009	total: 5.31s	remaining: 233ms
958:	learn: 1784.2428992	total: 5.32s	remaining: 227ms
959:	learn: 1783.6802043	total: 5.32s	remaining: 222ms
960:	learn: 1783.3047601	total: 5.33s	remaining: 216ms
961:	learn: 1783.0702734	total: 5.33s	remaining: 211ms
962:	learn: 1782.9234214	total: 5.33s	remaining: 205ms
963:	learn: 1782.7022124	total: 5.34s	remaining: 199ms
964:	learn: 1782.3334651	total: 5.34s	remaining: 194ms
965:	learn: 1782.0711641	total: 5.34s	remaining: 188ms
966:	learn: 1781.8447091	total: 5.35s	remaining: 183ms
967:	learn: 1781.1794599	total: 5.35s	remaining: 177ms
968:	learn: 1780.8719698	total: 5.36s	remaining: 171ms
969:	learn: 1780.4503615	total: 5.36s	remaining: 166ms
970:	learn: 1780.0814279	total: 5.36s	remaining: 160ms
971:	learn: 1779.8413046	total: 5.37s	remaining: 155ms
972:	learn: 1779.6169348	total: 5.37s	remaining: 149ms
973:	learn: 1779.4562521	total: 5.38s	remaining: 144ms
974:	learn: 1779.3037670	total: 5.38s	remaining: 138ms
975:	learn: 1778.8678870	total: 5.38s	remaining: 132ms
976:	learn: 1778.4888436	total: 5.39s	remaining: 127ms
977:	learn: 1778.3662453	total: 5.39s	remaining: 121ms
978:	learn: 1777.7930104	total: 5.39s	remaining: 116ms
979:	learn: 1777.4165604	total: 5.4s	remaining: 110ms
980:	learn: 1777.0851564	total: 5.4s	remaining: 105ms
981:	learn: 1777.0152936	total: 5.41s	remaining: 99.1ms
982:	learn: 1776.7253926	total: 5.41s	remaining: 93.6ms
983:	learn: 1776.1054952	total: 5.41s	remaining: 88ms
984:	learn: 1775.7898864	total: 5.42s	remaining: 82.5ms
985:	learn: 1775.4270102	total: 5.42s	remaining: 77ms
986:	learn: 1775.0761904	total: 5.42s	remaining: 71.5ms
987:	learn: 1774.5222134	total: 5.43s	remaining: 65.9ms
988:	learn: 1774.2816872	total: 5.43s	remaining: 60.4ms
989:	learn: 1773.8650257	total: 5.43s	remaining: 54.9ms
990:	learn: 1773.1775081	total: 5.44s	remaining: 49.4ms
991:	learn: 1772.8075712	total: 5.45s	remaining: 43.9ms
992:	learn: 1772.4544536	total: 5.45s	remaining: 38.4ms
993:	learn: 1772.2659066	total: 5.46s	remaining: 32.9ms
994:	learn: 1772.0462978	total: 5.46s	remaining: 27.4ms
995:	learn: 1771.2044114	total: 5.47s	remaining: 22ms
996:	learn: 1770.9048234	total: 5.47s	remaining: 16.5ms
997:	learn: 1770.4500877	total: 5.48s	remaining: 11ms
998:	learn: 1770.1346179	total: 5.48s	remaining: 5.49ms
999:	learn: 1769.8343384	total: 5.49s	remaining: 0us
<catboost.core.CatBoostRegressor object at 0x000001D85F8C9810>
In [46]:
cat_pred=cat.predict(X_test)
display (cat_pred)
array([13386.63817795, 24056.86842882, 28082.99514751, ...,
       45959.31234642, 31714.44751009,  9481.45994163])
In [47]:
display (r2_score(Y_test,cat_pred))
0.9641612028134969
In [48]:
import pickle 
# Saving model to disk
pickle.dump(cat, open('model.pkl','wb'))
In [49]:
model=pickle.load(open('model.pkl','rb'))
print (model.predict (X_train))
[14256.69390956 29475.32741129 11911.12888914 ... 21406.16876331
 17189.65426694 44839.56117235]
In [ ]: